In [1]:
# simple lti routines in this module
include("lti.jl")
using LTI
# plotting module
using PyPlot


Vendor:  Continuum Analytics, Inc.
Package: mkl
Message: trial mode expires in 25 days
INFO: Loading help data...

Dynamical system parameters

  • Dynamics matrix, $A$, is $N$ by $N$ with $K$ fast modes and $N - K$ slow modes. $A$ is generated in one of two ways:

    1. Normal, no imaginary eigenvalues by calling: A, _, _ = genDyn(N, K, dFast, dSlowLow, dSlowDiff), in which case $A$ will have $K$ slow eigenvalues drawn uniform randomly between dSlowLow and dSlowLow + dSlowDiff, and $N - K$ eigenvalues at dFast.
    2. Normal, with imaginary eigenvalues by calling: A, _, _ = genDynRot(N, K, dFast, dSlowLow, dSlowDiff, omegaScale), in which case $A$ will have $K / 2$ (then their complements) slow eigenvalues whose magnitudes are drawn uniformly at random between dSlowLow and dSlowLow + dSlowDiff with phases drawn uniformly at random between -omegaScale and omegaScale. There are then $N - K$ eigenvalues at dFast.
  • Observation matrix, $C$, is $M$ by $N$, and generated by sampling $M$ random rows of the identity matrix.

  • $\Pi$ is the steady state covariance matrix satisfying $A\Pi A^T + Q = \Pi$

Dynamical system evolution

The system's state $x_t$ evolves according to, $$x_{t + 1} = Ax_t + \xi_t,\text{ with }\xi_t \sim \mathcal{N}(0, Q) \\ y_{t} = Cx_{t} + \eta_t,\text{ with }\eta_t \sim \mathcal{N}(0, R) \\ x_1 \sim \mathcal{N}(0, \Pi)$$


In [2]:
# parameters
# K: number of slow modes; N: state dimension; dly: Hankel depth used below;
# r: observation-noise scale (0.0 => noiseless observations)
K = 8; N = 100; dly = 3; r = 0.0;
# dFast: magnitude of the N - K fast eigenvalues; slow eigenvalue magnitudes
# are drawn uniformly from [dSlowLow, dSlowLow + dSlowDiff], with phases in
# [-omegaScale, omegaScale] (see the genDynRot description above)
dFast = 0.1; dSlowLow = 0.69; dSlowDiff = 0.3; omegaScale = pi / 4;
# input and observational noise
Q = eye(N) * 1.0;
# dynamics as well as its eigen-decomposition
A, U, D = genDynRot(N, K, dFast, dSlowLow, dSlowDiff, omegaScale);
# A = [0.912 0.129 0.0308 -0.0028; -0.1064 0.9521 0.1709 -0.1174; -0.0692 -0.1469 0.9258 -0.0659; 0.0384 0.1269 0.0210 0.9125];
# D, U = eig(A)
# stationary covariance and its sqrt for the states
# P solves A*P*A' + Q = P (discrete Lyapunov); p is used to draw samples
# with covariance P
P = dlyap(A, Q); p = real(sqrtm(P));

# One Kalman-Ho trial.
#
# Simulate the LTI system with a fresh random sampling observation matrix of
# M rows for T steps, identify the dynamics with the Ho-Kalman subspace
# method, and score the recovered eigenvalues against the true slow spectrum.
#
# Arguments:
#   M      - number of observed coordinates (rows of C)
#   T      - number of simulated time steps
#   guessK - assumed model order for Ho-Kalman (default: true K)
#   nLag   - Hankel depth passed to hoKalman (default: global dly; previously
#            a hard-coded 3 that silently duplicated dly)
# Returns (Dp, Ep): estimated eigenvalues and their spectral error relative to
# the leading guessK true eigenvalues.
function trial(M, T; guessK=K, nLag=dly)
    C = genObsSamp(N, M); R = eye(M) * r;
    _, Y = runLTI(A, C, Q, R, int(T), P=P);
    Ap, _ = hoKalman(Y, nLag, guessK);
    Dp = eig(Ap)[1];
    Ep = specErr(Dp, D[1:guessK]);
    return Dp, Ep;
end


Out[2]:
trial (generic function with 1 method)

Eigen spectra of sample cross-covariance matrices


In [3]:
# Sample one trajectory and return two candidate order-revealing spectra:
# the singular values of the block-Hankel matrix of lagged cross-covariances,
# and the eigenvalues of the zero-lag sample covariance.
#
# Arguments:
#   T   - number of simulated time steps
#   M   - number of observed coordinates
#   dly - Hankel depth (number of lags)
# Returns (s, d): Hankel singular values and zero-lag covariance eigenvalues.
function hankelSCovD(T, M; dly=5)
    # fresh random observation matrix and (scaled) observation noise
    obsMat = genObsSamp(N, M)
    noiseCov = eye(M) * r
    _, obs = runLTI(A, obsMat, Q, noiseCov, int(T), P=P)

    # singular spectrum of the Hankel matrix built from lagged covariances
    hank = hankel(lag -> xCov(obs, lag), dly)
    sv = svd(hank)[2]

    # eigen-spectrum of the zero-lag sample covariance
    covEig = eig(xCov(obs, 0))[1]

    return sv, covEig
end


Out[3]:
hankelSCovD (generic function with 1 method)

In [4]:
# Sweep observation count M and trajectory length T; for each setting pool the
# Hankel singular values and zero-lag covariance eigenvalues over nTrial runs,
# then estimate the model order from the largest gaps ("knees") in the pooled
# descending-sorted spectra.
nTrial = 50;
Ts = logspace(log10(20), log10(5000), 25);
Ms = 4:4:N;
KpHankel = zeros(length(Ms), length(Ts));
KpCov = zeros(length(Ms), length(Ts));

for (kM, M) in enumerate(Ms)
    for (kT, T) in enumerate(Ts)
        # NOTE(fix): the pooled spectra were previously bound to top-level
        # names S and D, which clobbered the global true-eigenvalue vector D
        # from genDynRot (used by trial() and later cells); renamed.
        Spool, Dpool = Float64[], Float64[];
        for kTrial in 1:nTrial
            s, d = hankelSCovD(int(T), M)
            # pool spectra across trials (old-Julia vcat-by-bracket syntax)
            Spool = [Spool, s]; Dpool = [Dpool, d];
        end
        # descending sort makes diffs <= 0, so the ascending sortperm lists
        # the steepest drops first
        sort!(Spool, rev=true); sort!(Dpool, rev=true);
        dSIx = sortperm(diff(Spool)); dDIx = sortperm(diff(Dpool));
        # deepest drop index among the nTrial largest drops, normalized by the
        # number of pooled trials, gives the order estimate (== K on success)
        KpHankel[kM, kT] = int(maximum(dSIx[1:nTrial]) / nTrial);
        KpCov[kM, kT] = int(maximum(dDIx[1:nTrial]) / nTrial);
    end
end

In [5]:
# sanity check: should be (length(Ms), length(Ts)) = (25, 25)
size(KpHankel)


Out[5]:
(25,25)

In [6]:
# success map over (M, T): true where the Hankel-based order estimate equals K
imshow(KpHankel.==K, interpolation="nearest", aspect="auto")


Out[6]:
PyObject <matplotlib.image.AxesImage object at 0x7fde30044a10>

In [104]:
# success map over (M, T): true where the covariance-based estimate equals K
imshow(KpCov .== K, interpolation="nearest", aspect="auto")


Out[104]:
j -30 -25 -20 -15 -10 -5 0 5 10 15 20 25 30 35 40 45 50 55 -25 -24 -23 -22 -21 -20 -19 -18 -17 -16 -15 -14 -13 -12 -11 -10 -9 -8 -7 -6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 -25 0 25 50 -26 -24 -22 -20 -18 -16 -14 -12 -10 -8 -6 -4 -2 0 2 4 6 8 10 12 14 16 18 20 22 24 26 28 30 32 34 36 38 40 42 44 46 48 50 1.0 value -30 -25 -20 -15 -10 -5 0 5 10 15 20 25 30 35 40 45 50 55 -25 -24 -23 -22 -21 -20 -19 -18 -17 -16 -15 -14 -13 -12 -11 -10 -9 -8 -7 -6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 -25 0 25 50 -26 -24 -22 -20 -18 -16 -14 -12 -10 -8 -6 -4 -2 0 2 4 6 8 10 12 14 16 18 20 22 24 26 28 30 32 34 36 38 40 42 44 46 48 50 i

plot( layer(y=sort(Sh, rev=true)[1:K * nTrial * 5], x=1:K * nTrial * 5, Geom.line), layer(xintercept=[K * nTrial], Geom.vline(color="red", size=1mm)))


In [7]:
# NOTE(review): DM0 is not defined in this notebook state (see the error that
# follows) — presumably the pooled zero-lag covariance eigenvalues; define it
# before re-running this cell.
plot(1:K * nTrial * 5, sort(DM0, rev=true)[1:K * nTrial * 5], "k.-", linewidth=2)
# red line marks where the spectrum should drop if exactly K modes dominate
axvline(x=K * nTrial, color="red", linewidth=2)
xlabel("Index"); ylabel("CovarianceEigenvalue")


DM0 not defined
while loading In[7], in expression starting on line 1

In [82]:
# knee-based order estimate from pooled Hankel singular values Sh (defined in
# an earlier session, not in this notebook state) — expect a value near K = 8
maximum(sortperm(diff(sort(Sh, rev=true)))[1:nTrial]) / nTrial


Out[82]:
8.03

In [79]:
# consecutive gaps of the descending-sorted pooled singular values (Gadfly);
# Sh must be defined upstream
plot(y=diff(sort(Sh, rev=true))[1:K * nTrial * 5], x=1:K * nTrial * 5, Geom.line)


Out[79]:
x -5×103 -4×103 -3×103 -2×103 -1×103 0 1×103 2×103 3×103 4×103 5×103 6×103 7×103 8×103 9×103 -4.0×103 -3.8×103 -3.6×103 -3.4×103 -3.2×103 -3.0×103 -2.8×103 -2.6×103 -2.4×103 -2.2×103 -2.0×103 -1.8×103 -1.6×103 -1.4×103 -1.2×103 -1.0×103 -8.0×102 -6.0×102 -4.0×102 -2.0×102 0 2.0×102 4.0×102 6.0×102 8.0×102 1.0×103 1.2×103 1.4×103 1.6×103 1.8×103 2.0×103 2.2×103 2.4×103 2.6×103 2.8×103 3.0×103 3.2×103 3.4×103 3.6×103 3.8×103 4.0×103 4.2×103 4.4×103 4.6×103 4.8×103 5.0×103 5.2×103 5.4×103 5.6×103 5.8×103 6.0×103 6.2×103 6.4×103 6.6×103 6.8×103 7.0×103 7.2×103 7.4×103 7.6×103 7.8×103 8.0×103 -5.0×103 0 5.0×103 1.0×104 -4.0×103 -3.5×103 -3.0×103 -2.5×103 -2.0×103 -1.5×103 -1.0×103 -5.0×102 0 5.0×102 1.0×103 1.5×103 2.0×103 2.5×103 3.0×103 3.5×103 4.0×103 4.5×103 5.0×103 5.5×103 6.0×103 6.5×103 7.0×103 7.5×103 8.0×103 -1.1 -1.0 -0.9 -0.8 -0.7 -0.6 -0.5 -0.4 -0.3 -0.2 -0.1 0.0 0.1 0.2 0.3 0.4 0.5 0.6 -1.00 -0.98 -0.96 -0.94 -0.92 -0.90 -0.88 -0.86 -0.84 -0.82 -0.80 -0.78 -0.76 -0.74 -0.72 -0.70 -0.68 -0.66 -0.64 -0.62 -0.60 -0.58 -0.56 -0.54 -0.52 -0.50 -0.48 -0.46 -0.44 -0.42 -0.40 -0.38 -0.36 -0.34 -0.32 -0.30 -0.28 -0.26 -0.24 -0.22 -0.20 -0.18 -0.16 -0.14 -0.12 -0.10 -0.08 -0.06 -0.04 -0.02 0.00 0.02 0.04 0.06 0.08 0.10 0.12 0.14 0.16 0.18 0.20 0.22 0.24 0.26 0.28 0.30 0.32 0.34 0.36 0.38 0.40 0.42 0.44 0.46 0.48 0.50 -1.0 -0.5 0.0 0.5 -1.00 -0.95 -0.90 -0.85 -0.80 -0.75 -0.70 -0.65 -0.60 -0.55 -0.50 -0.45 -0.40 -0.35 -0.30 -0.25 -0.20 -0.15 -0.10 -0.05 0.00 0.05 0.10 0.15 0.20 0.25 0.30 0.35 0.40 0.45 0.50 y

In [ ]:
function

Spectra of sample cross-covariance

red lines = theoretical. pretty long tail


In [4]:
# Histogram of sample covariance eigenvalues with theoretical values in red.
# 1 / (1 - D^2) is presumably the stationary variance of each mode under unit
# input noise (scalar Lyapunov solution) — NOTE(review): eigData must be
# defined upstream; confirm D still holds the true eigenvalues here.
set_default_plot_size(12cm, 8cm)
plot(layer(x=eigData, Geom.histogram),
     layer(xintercept=1 ./ (1 - D.^2), Geom.vline(color="red")),
     Guide.xlabel("Eigenvalues"), Guide.ylabel("Count"))


Out[4]:
Eigenvalues -20 -15 -10 -5 0 5 10 15 20 25 30 35 -15.0 -14.5 -14.0 -13.5 -13.0 -12.5 -12.0 -11.5 -11.0 -10.5 -10.0 -9.5 -9.0 -8.5 -8.0 -7.5 -7.0 -6.5 -6.0 -5.5 -5.0 -4.5 -4.0 -3.5 -3.0 -2.5 -2.0 -1.5 -1.0 -0.5 0.0 0.5 1.0 1.5 2.0 2.5 3.0 3.5 4.0 4.5 5.0 5.5 6.0 6.5 7.0 7.5 8.0 8.5 9.0 9.5 10.0 10.5 11.0 11.5 12.0 12.5 13.0 13.5 14.0 14.5 15.0 15.5 16.0 16.5 17.0 17.5 18.0 18.5 19.0 19.5 20.0 20.5 21.0 21.5 22.0 22.5 23.0 23.5 24.0 24.5 25.0 25.5 26.0 26.5 27.0 27.5 28.0 28.5 29.0 29.5 30.0 -20 0 20 40 -15 -14 -13 -12 -11 -10 -9 -8 -7 -6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 -500 -400 -300 -200 -100 0 100 200 300 400 500 600 700 800 900 -400 -380 -360 -340 -320 -300 -280 -260 -240 -220 -200 -180 -160 -140 -120 -100 -80 -60 -40 -20 0 20 40 60 80 100 120 140 160 180 200 220 240 260 280 300 320 340 360 380 400 420 440 460 480 500 520 540 560 580 600 620 640 660 680 700 720 740 760 780 800 -500 0 500 1000 -400 -350 -300 -250 -200 -150 -100 -50 0 50 100 150 200 250 300 350 400 450 500 550 600 650 700 750 800 Count

Spectra of the difference between sample and theoretical cross-covariance


In [6]:
# histogram of eigenvalues of the (sample - exact) cross-covariance deviation;
# NOTE(review): eigError must be defined upstream
plot(x=eigError, Geom.histogram, Guide.xlabel("Eigenvalues"), Guide.ylabel("Count"))


Out[6]:
Eigenvalues -16 -14 -12 -10 -8 -6 -4 -2 0 2 4 6 8 10 12 14 16 18 -14.0 -13.5 -13.0 -12.5 -12.0 -11.5 -11.0 -10.5 -10.0 -9.5 -9.0 -8.5 -8.0 -7.5 -7.0 -6.5 -6.0 -5.5 -5.0 -4.5 -4.0 -3.5 -3.0 -2.5 -2.0 -1.5 -1.0 -0.5 0.0 0.5 1.0 1.5 2.0 2.5 3.0 3.5 4.0 4.5 5.0 5.5 6.0 6.5 7.0 7.5 8.0 8.5 9.0 9.5 10.0 10.5 11.0 11.5 12.0 12.5 13.0 13.5 14.0 14.5 15.0 15.5 16.0 -20 -10 0 10 20 -14 -13 -12 -11 -10 -9 -8 -7 -6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 -300 -250 -200 -150 -100 -50 0 50 100 150 200 250 300 350 400 450 500 550 -250 -240 -230 -220 -210 -200 -190 -180 -170 -160 -150 -140 -130 -120 -110 -100 -90 -80 -70 -60 -50 -40 -30 -20 -10 0 10 20 30 40 50 60 70 80 90 100 110 120 130 140 150 160 170 180 190 200 210 220 230 240 250 260 270 280 290 300 310 320 330 340 350 360 370 380 390 400 410 420 430 440 450 460 470 480 490 500 -250 0 250 500 -260 -240 -220 -200 -180 -160 -140 -120 -100 -80 -60 -40 -20 0 20 40 60 80 100 120 140 160 180 200 220 240 260 280 300 320 340 360 380 400 420 440 460 480 500 Count

Max eigenvalues of the sample cross-covariance as well as the sample-theory difference


In [ ]:
# For several trajectory lengths T, collect the maximum eigenvalue of the
# deviation between the sample lag-dt cross-covariance and its exact value.
# NOTE(review): dt, C and R are not defined at top level in this notebook
# state — confirm they are set before running. YInd/MtInd and eigDataMax are
# created/computed but never used below.
nTrial = 200;
nT = 3;
Ts = logspace(2, 5, nT)
eigDataMax = {}
eigErrorMax = {}

for(kT, T) in enumerate(Ts)
    tmp = Float64[]
    for ktrial in 1:nTrial
        print(ktrial)

        Y, X = runLTI(A, C, Q, R, int(T), P=P);
        # time-uncorrelated surrogate with the same marginal covariance
        # (p = real(sqrtm(P)) from the setup cell)
        YInd = C * p * randn(N, int(T))
        MtInd = xCov(YInd, dt)
        MtData = xCov(Y, dt);
        # exact lag-dt cross-covariance of the stationary process
        MtExact = A^dt * P;

        # deviations
        dMtData = MtData - MtExact;

        # compute eigenvalues
        eError, _ = eig(dMtData)
        # old-Julia vcat-by-bracket append
        tmp = [tmp, maximum(eError)]
    end
    push!(eigErrorMax, tmp)
end

In [ ]:
# Overlay, one Gadfly layer per trajectory length T, a 100-bin line histogram
# of the max-eigenvalue deviations collected above.
layers = Layer[];

for (kT, T) in enumerate(Ts)
    e, v = hist(eigErrorMax[kT], 100)
    # convert bin edges to bin centers so the line plots at bin midpoints
    e = (e[2:end] + e[1:end - 1]) / 2.0
    layers = [layers layer(x=e, y=v, Geom.line)]
end
plot(layers, Guide.xlabel("Max eigenvalue"), Guide.ylabel("Count"))